
function [alpha,theta,solution,minr,t,maxerr]=...
   ganders(MI,SG,J,tmax,stopCond,t,alpha,theta)
% GANDERS algorithm solving Generalized Anderson's task.
% [alpha,theta,solution,minr,t,maxerr]=...
%    ganders(MI,SG,J,tmax,rdelta,t,alpha,theta)
%
% GANDERS is an implementation of the general algorithmic framework that 
%   finds the optimal solution of the Generalized Anderson's task (GAT). 
%
%   The GAT is the problem of finding a separating hyperplane 
%   (alpha'*x = theta) between two classes such that the found solution 
%   minimizes the probability of misclassification. Both classes are 
%   described in terms of the conditional probability density functions 
%   p(x|k), where x is an observation and k is the class label (1 for the 
%   first and 2 for the second class). The p(x|k) of both classes are 
%   normal distributions, but their true parameters are not known; only a 
%   finite set of possible parameters is known. 
%
%   The algorithm works iteratively until the change of the solution 
%   quality (the radius of the smallest ellipsoid) between two consecutive 
%   steps is less than the given limit rdelta, or until the number of 
%   performed steps exceeds the given limit tmax.
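%
%   The quality of a hyperplane (alpha,theta) is the radius of the smallest
%   ellipsoid, i.e. (with the sign reversed for pairs of the second class)
%      r = min_i ( alpha'*mi_i - theta ) / sqrt( alpha'*sigma_i*alpha ),
%   and the probability of misclassification is bounded from above by
%   1 - normcdf(r), which is returned as maxerr. The algorithm therefore
%   seeks the hyperplane maximizing r.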
%
% Input:
% (K is the number of supplied distributions, i.e. {mean, covariance} pairs;
%  N is the dimension of the feature space.)
%
% GANDERS(MI,SIGMA,J,tmax,rdelta)
%   MI [NxK] contains K column vectors of mean values, MI=[mi_1,mi_2,...,mi_K],
%      where mi_i is the i-th column vector N-by-1. 
%   SIGMA [Nx(K*N)] contains K covariance matrices,
%      SIGMA=[sigma_1,sigma_2,...,sigma_K], where sigma_i is the i-th matrix 
%      N-by-N.
%   J [1xK] is a vector of class labels, J=[label_1,label_2,...,label_K], where
%      label_i is an integer 1 or 2 according to which class the pair 
%      {mi_i,sigma_i} describes.
%   tmax [1x1] is the maximal number of steps of the algorithm. Default is inf 
%      (which excludes this stop condition).
%   rdelta [1x1] is a non-negative real number (0 excludes this stop 
%      condition) determining the stop condition: the algorithm exits when 
%      the change of the solution quality (radius of the smallest ellipsoid) 
%      becomes less than rdelta. A two-element vector [rdelta,T] can be 
%      passed instead, in which case the change is measured over the last 
%      T steps (default T=1).
%
% GANDERS(MI,SIGMA,J,tmax,rdelta,t,alpha,theta) begins from the state given by
%   t [1x1] the starting step number.
%   alpha [Nx1], theta [1x1] the state variables of the algorithm.
%
% Returns:
%   alpha [Nx1] is the normal vector of the found separating hyperplane.
%   theta [1x1] is the threshold of the separating hyperplane (alpha'*x=theta).
%   solution [1x1] is equal to -1 if a solution does not exist,
%      is equal to 0 if a solution was not found,
%      is equal to 1 if it was found.
%   minr [1x1] is the radius of the smallest ellipsoid, corresponding to the 
%      quality of the found solution.
%   t [1x1] is the number of steps the algorithm performed.
%   maxerr [1x1] is an upper bound on the probability of misclassification.
%
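% Example (an illustrative sketch only; the data below are made up and merely 
%   match the input format described above):
%   MI = [0 0 5 5; 0 1 5 6];             % four mean vectors in 2D (N=2, K=4)
%   SG = [eye(2) eye(2) eye(2) eye(2)];  % four 2x2 covariance matrices
%   J  = [1 1 2 2];                      % class label of each {mi,sigma} pair
%   [alpha,theta,solution,minr,t,maxerr] = ganders(MI,SG,J,100,1e-6);
%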
% See also OANDERS, EANDERS, GGANDERS, GANDERS2
%

% Statistical Pattern Recognition Toolbox, Vojtech Franc, Vaclav Hlavac
% (c) Czech Technical University Prague, http://cmp.felk.cvut.cz
% Written by Vojtech Franc (diploma thesis) 24.10.1999, 6.5.2000
% Modifications
% 30-August-2001, V.Franc, mistake repaired, dalpha = dalpha/norm(dalpha) removed
% 24. 6.00 V. Hlavac, comments polished.

MINEPS_TMAX=1e2;    % max # of iter. in epsilon minimization

% default arguments setting
if nargin < 3, 
  error('Not enough input arguments.');
end
if nargin < 4,
   tmax = inf;
end

%%% Gets stop condition
if nargin < 5,
   stopCond = 0;
end
if length(stopCond)==2,
  stopT=stopCond(2);
else
  stopT=1;
end
deltaR=stopCond(1);

%##debug override disabled (it forced stopT=100 regardless of stopCond)
%cond=2;
%stopT=100;

if nargin < 6,
   t=0;
end

% perform transformation
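% (ctransf is assumed to map the task into homogeneous coordinates: the
% threshold theta is absorbed into alpha and the second-class pairs are
% reflected, so that the task below is to find alpha with alpha'*mi_i > 0
% for all i; ictransf at the end of the function undoes this mapping.)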
if nargin < 8,
   [alpha,MI,SG]=ctransf(0,0,MI,J,SG);
else
   [alpha,MI,SG]=ctransf(alpha,theta,MI,J,SG);
end

% get dimension N and number of distributions
N=size(MI,1);
K=size(MI,2);

% STEP (1)
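% (csvm is assumed to return a vector alpha satisfying alpha'*mi_i > 0 for
% all columns mi_i of its argument, together with a flag telling whether
% such a vector exists; here it provides the initial solution.)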
if t==0,
   [alpha,alphaexists]=csvm(MI);

   % if alpha does not exist, the error cannot be less than 50%
   if alphaexists==1,
      t=1;
   else
      % no feasible solution can be found, so exit the algorithm
      alpha=zeros(N,1); theta=0; minr=0;
      solution=-1;
      return;
   end
   tmax=tmax-1;
end

% STEP (2.1)
% find the minimal radius of all the  ellipsoids
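% rka(i) = (alpha'*mi_i)/sqrt(alpha'*sigma_i*alpha) is the distance of the
% i-th mean from the hyperplane alpha'*x = 0, measured in units of the
% standard deviation of the i-th distribution along alpha; the smallest of
% these radii determines the solution quality. minri collects all the
% (numerically) active distributions attaining this minimum.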
%[minrs,minri]=min( (alpha'*MI)./sqrt( reshape(alpha'*SG,N,K)'*alpha )' );
%minr=minrs(1);

rka=(alpha'*MI)./sqrt( reshape(alpha'*SG,N,K)'*alpha )';
[minr,inx]=min(rka);
minri=find((rka-0.0001) <= minr);

lastminr=minr;

queueMinR=[];  % queue of the best solutions, stopT steps backward
t0=0;   % counter of performed iterations in this call of the function
% t is the total number of iterations, i.e. t(end of function) = t(start of function) + t0;

% iterations cycle
solution=0;
while solution==0 & tmax > 0 & t ~= 0,
   tmax = tmax-1;

   % STEP (2.2)
   % compute contact points and the negative derivative of the error function
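   % Each active ellipsoid touches the hyperplane at the contact point
   %   x0 = mi_j - ((alpha'*mi_j)/(alpha'*sigma_j*alpha)) * sigma_j*alpha;
   % Y0 stores these points rescaled by 1/sqrt(alpha'*sigma_j*alpha), and
   % csvm(Y0) is then used to look for a direction dalpha that improves all
   % of the currently worst (active) radii at once.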
   Y0=zeros(N,length(minri));

   for i=1:length(minri),
      j=minri(i);
     % computes point of contact
      x0=MI(:,j)-(( alpha'*MI(:,j) )/...
               (alpha'*SG(:,(j-1)*N+1:j*N)*alpha))*SG(:,(j-1)*N+1:j*N)*alpha;
      Y0(:,i) = x0/sqrt( alpha'*SG(:,(j-1)*N+1:N*j)*alpha );
   end

   % find direction delta_alpha in which error decreases
   [dalpha,dalphaexists]=csvm(Y0);
 
%   dalpha=dalpha/norm(dalpha);
   
   if dalphaexists == 1,
 
%      alpha=alpha/norm(alpha);
%      dalpha=dalpha/norm(dalpha);
     
      % STEP (3)
      % find t=arg min max epsilon(alpha+t*dalpha, MI, SG )
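      % (mineps is assumed to perform a 1-D line search along dalpha,
      % returning the alpha that minimizes the maximal classification
      % error; MINEPS_TMAX bounds its number of iterations.)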
       alpha=mineps(MI,SG,alpha,dalpha,MINEPS_TMAX,0);
%      alpha=minepsvl(MI,SG,alpha,dalpha,MINEPS_TMAX,0);
%      alpha=minepsrt(MI,SG,alpha,dalpha);

      % find the minimal radius of all the  ellipsoids
%      [minrs,minri]=min( (alpha'*MI)./sqrt( reshape(alpha'*SG,N,K)'*alpha )' );
%      minr=minrs(1);
%      minri

      rka=(alpha'*MI)./sqrt( reshape(alpha'*SG,N,K)'*alpha )';
      [minr,inx]=min(rka);
      minri=find((rka-0.0001) <= minr);
%      minrinx=minri
   end

   %% increase iteration counter
   t=t+1;
   t0=t0+1;

   %%%% Stop criterion %%%%%%%%%%%%%%%%
   if dalphaexists == 0,
      solution=1;
      t=t-1;
   else
      %%% the second stop condition
      if t0 > stopT,
        if (minr-queueMinR(1)) < deltaR,
          solution = 1;
          t=t-1;
        end
        % a queue of the best solutions
        queueMinR=[queueMinR(2:end),minr];
      else
        queueMinR=[queueMinR,minr];
      end
   end

   % store old value of minr
   lastminr=minr;
end

% inverse transformation
[alpha,theta]=ictransf(alpha);

% upper bound of prob. of bad classification
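% For a Gaussian class the probability of falling on the wrong side of the
% hyperplane is 1-normcdf(r), where r is the corresponding radius; taking
% the smallest radius therefore bounds the error of every admissible pair.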
maxerr=1-cdf('norm',minr,0,1);